In [1]:
import glob
import matplotlib.pyplot as plt
import pickle
import lmfit
import numpy as np
import scipy.stats as ss
plt.style.use('seaborn-white')
import statsmodels.api as sm
from statsmodels.sandbox.regression.predstd import wls_prediction_std
from statsmodels.formula.api import ols, rlm
import sys
sys.path.append('../')
from Linearity import Neuron
In [33]:
filename = '/media/sahil/NCBS_Shares_BGStim/patch_data/161013/c1/plots/c1.pkl'
In [34]:
control_result2_rsquared_adj = []
control_result1_rsquared_adj = []
control_var_expected = []
gabazine_result2_rsquared_adj = []
gabazine_result1_rsquared_adj = []
gabazine_var_expected = []
tolerance = 5e-4
In [41]:
def linearModel(x, a=1, b=1):
    # Linear model (labelled "Divisive Inhibition Model" in the plots below)
    return a*x + b

def DN_model(x, a=1, b=1):
    # Divisive normalization model; saturates at 1/a for large x
    # (alternative form, unused: x - a*(x**2)/(b+x))
    return x/(a*x + b)

def DN_1_model(x, a=1):
    # One-parameter divisive normalization variant (not used below)
    return a*x/(x + 1)

def DN_2_model(x, a=1, b=1):
    # Same functional form as DN_model (not used below)
    return x/(a*x + b)
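The two fitted models differ in their limiting behaviour: DN_model(x) = x/(a*x + b) approaches 1/a for large x, while linearModel grows without bound with slope a. A quick illustrative check, using hypothetical parameter values (not part of the original notebook):

# Sketch: DN_model saturates towards 1/a, linearModel keeps growing.
_x = np.array([1., 10., 100., 1000.])
_a, _b = 0.3, 0.9   # hypothetical parameters
print(DN_model(_x, _a, _b))      # tends towards 1/a = 3.33
print(linearModel(_x, _a, _b))   # unbounded, constant slope a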
In [42]:
lin_aic = []
dn_aic = []
lin_chi = []
dn_chi = []

control_observed = {}
control_observed_average = {}
gabazine_observed = {}
gabazine_observed_average = {}
control_expected = {}
control_expected_average = {}
gabazine_expected = {}
gabazine_expected_average = {}
feature = 0

neuron = Neuron.load(filename)
for expt in neuron.experiment:
    print ("Starting expt {}".format(expt))
    for numSquares in neuron.experiment[expt].keys(): 
        print ("Square {}".format(numSquares))
        if numSquares != 1:
            nSquareData = neuron.experiment[expt][numSquares]
            if expt == "Control":
                coords_C = nSquareData.coordwise
                for coord in coords_C: 
                    if feature in coords_C[coord].feature:
                        control_observed_average.update({coord: coords_C[coord].average_feature[feature]})
                        control_expected_average.update({coord: coords_C[coord].expected_feature[feature]})
                        control_observed.update({coord: []})
                        control_expected.update({coord: []})
                        for trial in coords_C[coord].trials:
                            if feature in trial.feature:
                                control_observed[coord].append(trial.feature[feature])
                                control_expected[coord].append(coords_C[coord].expected_feature[feature])
            elif expt == "GABAzine":
                coords_I = nSquareData.coordwise
                for coord in coords_I: 
                    if feature in coords_I[coord].feature:
                        gabazine_observed.update({coord: []})
                        gabazine_expected.update({coord: []})
                        gabazine_observed_average.update({coord: coords_I[coord].average_feature[feature]})
                        gabazine_expected_average.update({coord: coords_I[coord].expected_feature[feature]})

                        for trial in coords_I[coord].trials:
                            if feature in trial.feature:
                                gabazine_observed[coord].append(trial.feature[feature])
                                gabazine_expected[coord].append(coords_I[coord].expected_feature[feature])
print ("Read {} into variables".format(filename))
Starting expt Control
Square 1
Square 2
Square 3
Square 5
Square 7
Square 9
Starting expt GABAzine
Square 1
Square 2
Square 3
Square 5
Square 7
Square 9
Read /media/sahil/NCBS_Shares_BGStim/patch_data/161013/c1/plots/c1.pkl into variables
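As an optional sanity check (not part of the original analysis), the dictionaries filled above can be summarized before any fitting:

# Sketch: count coordinates and trials read per condition.
print("Control coords: {}, GABAzine coords: {}".format(
    len(control_observed), len(gabazine_observed)))
print("Control trials: {}, GABAzine trials: {}".format(
    sum(len(v) for v in control_observed.values()),
    sum(len(v) for v in gabazine_observed.values())))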
In [43]:
list_control_observed   = []  
list_gabazine_observed  = []
list_control_expected   = []
list_gabazine_expected  = []

if len(gabazine_observed):
    for key in gabazine_observed.keys():
        for element1, element2 in zip(gabazine_observed[key], gabazine_expected[key]):
            if not (element1 < 0 or np.isclose(element1, 0, atol=tolerance) or element2 < 0 or np.isclose(element2, 0, atol=tolerance)):
                list_gabazine_observed.append(element1)
                list_gabazine_expected.append(element2)

if len(control_observed):
    for key in control_observed.keys():
        for element1, element2 in zip(control_observed[key], control_expected[key]):
            if not (element1 < 0 or np.isclose(element1, 0, atol=tolerance) or element2 < 0 or np.isclose(element2, 0, atol=tolerance)):
                list_control_observed.append(element1)
                list_control_expected.append(element2)
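The acceptance test above is repeated, with the same tolerance, in the next cell as well; a hypothetical helper (not in the original notebook) that captures it in one place:

# Hypothetical helper: keep a pair only if both the observed and expected
# values are non-negative and not within `tolerance` of zero.
def keep_pair(obs, exp, atol=tolerance):
    return not (obs < 0 or np.isclose(obs, 0, atol=atol)
                or exp < 0 or np.isclose(exp, 0, atol=atol))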
In [44]:
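# Rebuild the same lists from the coordinate-averaged features
# (this overwrites the trial-wise lists built in the previous cell).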
list_control_observed   = []  
list_gabazine_observed  = []
list_control_expected   = []
list_gabazine_expected  = []

if len(gabazine_observed_average):
    for key in gabazine_observed_average.keys():
        element1, element2 = gabazine_observed_average[key], gabazine_expected_average[key]
        if not (element1 < 0 or np.isclose(element1, 0, atol=tolerance) or element2 < 0 or np.isclose(element2, 0, atol=tolerance)):
            list_gabazine_observed.append(element1)
            list_gabazine_expected.append(element2)

if len(control_observed_average):
    for key in control_observed_average.keys():
        element1, element2 = control_observed_average[key], control_expected_average[key]
        if not (element1 < 0 or np.isclose(element1, 0, atol=tolerance) or element2 < 0 or np.isclose(element2, 0, atol=tolerance)):
            list_control_observed.append(element1)
            list_control_expected.append(element2)
In [45]:
minPoints = 10
minIQR = 3
if len(list_control_expected) > minPoints and len(list_control_observed) > minPoints and ss.iqr(list_control_expected) > minIQR:
    print ("Control")
    X = np.array(list_control_expected)
    y = np.array(list_control_observed)
    idx   = np.argsort(X)
    X = X[idx]
    y = y[idx]

    linear_Model = lmfit.Model(linearModel)
    DN_Model = lmfit.Model(DN_model)

    lin_pars = linear_Model.make_params()
    lin_result = linear_Model.fit(y, lin_pars, x=X)
    lin_aic.append(lin_result.aic)
    lin_chi.append(lin_result.redchi)

    DN_pars = DN_Model.make_params()
    DN_result = DN_Model.fit(y, DN_pars, x=X)
    dn_aic.append(DN_result.aic)
    dn_chi.append(DN_result.redchi)

    print (lin_result.fit_report())
    print (DN_result.fit_report())

    ax = plt.subplot(111)
    ax.scatter(X, y, alpha=0.2)
    ax.set_xlim(xmin=0.)
    ax.set_ylim(ymin=0.)
    ax.set_xlabel("Expected")
    ax.set_ylabel("Observed")
    ax.set_title("Divisive Normalization and Inhibition fits")
    ax.plot(X, lin_result.best_fit, '-', label="Divisive Inhibition Model")
    ax.plot(X, DN_result.best_fit, '-', label="Divisive Normalization Model")
    plt.legend()
    plt.show()
Control
[[Model]]
    Model(linearModel)
[[Fit Statistics]]
    # function evals   = 8
    # data points      = 120
    # variables        = 2
    chi-square         = 27.620
    reduced chi-square = 0.234
    Akaike info crit   = -172.272
    Bayesian info crit = -166.697
[[Variables]]
    a:   0.14869098 +/- 0.015624 (10.51%) (init= 1)
    b:   1.07896469 +/- 0.094868 (8.79%) (init= 1)
[[Correlations]] (unreported correlations are <  0.100)
    C(a, b)                      = -0.885 

[[Model]]
    Model(DN_model)
[[Fit Statistics]]
    # function evals   = 25
    # data points      = 120
    # variables        = 2
    chi-square         = 24.535
    reduced chi-square = 0.208
    Akaike info crit   = -186.488
    Bayesian info crit = -180.913
[[Variables]]
    a:   0.32561263 +/- 0.021882 (6.72%) (init= 1)
    b:   0.90515769 +/- 0.118890 (13.13%) (init= 1)
[[Correlations]] (unreported correlations are <  0.100)
    C(a, b)                      = -0.882 
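Lower AIC indicates the better-supported model; for the Control data the divisive normalization fit (AIC ≈ -186.5) beats the linear fit (AIC ≈ -172.3). A minimal sketch of that comparison, using the results from the cell above:

# Sketch: compare the two Control fits by AIC (lower is better).
delta_aic = lin_result.aic - DN_result.aic
print("AIC(linear) - AIC(DN) = {:.1f}".format(delta_aic))
# A positive difference favours the divisive normalization model.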

In [46]:
if len(list_gabazine_expected) > minPoints and len(list_gabazine_observed) > minPoints and ss.iqr(list_gabazine_expected) > minIQR:

    print ("GABAzine")
    X = np.array(list_gabazine_expected)
    y = np.array(list_gabazine_observed)
    
    idx   = np.argsort(X)
    X = X[idx]
    y = y[idx]

    linear_Model = lmfit.Model(linearModel)
    DN_Model = lmfit.Model(DN_model)

    lin_pars = linear_Model.make_params()
    lin_result = linear_Model.fit(y, lin_pars, x=X)
    lin_aic.append(lin_result.aic)
    lin_chi.append(lin_result.redchi)

    DN_pars = DN_Model.make_params()
    DN_result = DN_Model.fit(y, DN_pars, x=X)
    dn_aic.append(DN_result.aic)
    dn_chi.append(DN_result.redchi)

    print (lin_result.fit_report())
    print (DN_result.fit_report())

    ax = plt.subplot(111)
    ax.scatter(X, y, alpha=0.2)
    ax.set_xlim(xmin=0.)
    ax.set_ylim(ymin=0.)
    ax.set_xlabel("Expected")
    ax.set_ylabel("Observed")
    ax.set_title("Divisive Normalization and Inhibition fits")
    ax.plot(X, lin_result.best_fit, '-', label="Divisive Inhibition Model")
    ax.plot(X, DN_result.best_fit, '-', label="Divisive Normalization Model")
    plt.legend()
    plt.show()
GABAzine
[[Model]]
    Model(linearModel)
[[Fit Statistics]]
    # function evals   = 8
    # data points      = 120
    # variables        = 2
    chi-square         = 60.446
    reduced chi-square = 0.512
    Akaike info crit   = -78.290
    Bayesian info crit = -72.715
[[Variables]]
    a:   0.59123808 +/- 0.019180 (3.24%) (init= 1)
    b:   0.74896014 +/- 0.133881 (17.88%) (init= 1)
[[Correlations]] (unreported correlations are <  0.100)
    C(a, b)                      = -0.873 

[[Model]]
    Model(DN_model)
[[Fit Statistics]]
    # function evals   = 39
    # data points      = 120
    # variables        = 2
    chi-square         = 52.428
    reduced chi-square = 0.444
    Akaike info crit   = -95.367
    Bayesian info crit = -89.792
[[Variables]]
    a:   0.04207642 +/- 0.005774 (13.72%) (init= 1)
    b:   1.07638476 +/- 0.050941 (4.73%) (init= 1)
[[Correlations]] (unreported correlations are <  0.100)
    C(a, b)                      = -0.935 

In [23]:
list_control_observed   = []  
list_gabazine_observed  = []
list_control_expected   = []
list_gabazine_expected  = []

if len(gabazine_observed_average) and len(control_observed_average):
    for key in list(set(gabazine_observed_average).intersection(set(control_observed_average))):
        element1, element2 =  gabazine_observed_average[key], control_observed_average[key]
        if not (element1 <0 or np.isclose(element1, 0, atol=tolerance) or element2<0 or np.isclose(element2, 0, atol=tolerance)):
            list_gabazine_observed.append(element1)
            list_control_observed.append(element2)
set([frozenset([(10, 8), (5, 7)]), frozenset([(4, 2), (5, 5)]), frozenset([(7, 5), (7, 7), (6, 6)]), frozenset([(6, 2), (6, 10)]), frozenset([(6, 6), (4, 8), (5, 7), (7, 7), (8, 8), (7, 5), (3, 7)]), frozenset([(8, 10), (2, 4)]), frozenset([(3, 7), (8, 8)]), frozenset([(6, 4), (8, 8), (6, 6)]), frozenset([(7, 5), (6, 6)]), frozenset([(9, 3), (7, 5), (6, 6)]), frozenset([(6, 4), (7, 5), (6, 6), (7, 7), (9, 7)]), frozenset([(8, 6), (7, 7)]), frozenset([(5, 7), (7, 7)]), frozenset([(6, 6), (4, 8), (2, 8), (5, 7), (7, 7), (8, 8), (3, 7)]), frozenset([(2, 8), (5, 7), (3, 7)]), frozenset([(6, 4), (5, 7)]), frozenset([(3, 7), (9, 3), (6, 6), (8, 10), (5, 7)]), frozenset([(9, 5), (3, 3)]), frozenset([(8, 8), (7, 7), (4, 8)]), frozenset([(2, 8), (7, 7), (8, 6)]), frozenset([(7, 7), (6, 6)]), frozenset([(4, 10), (7, 7), (3, 7)]), frozenset([(4, 10), (6, 8)]), frozenset([(6, 4), (5, 3)]), frozenset([(8, 8), (4, 6), (6, 6)]), frozenset([(8, 8), (4, 8)]), frozenset([(7, 3), (8, 8), (6, 6), (7, 7), (10, 8)]), frozenset([(2, 6), (6, 6), (4, 8)]), frozenset([(4, 10), (6, 10), (7, 5), (6, 6), (4, 8)]), frozenset([(3, 7), (7, 5), (3, 9), (6, 6), (5, 7)]), frozenset([(6, 4), (6, 6), (10, 4), (4, 8), (5, 7), (3, 7), (7, 9)]), frozenset([(10, 8), (4, 6), (2, 8), (5, 7), (7, 7), (8, 8), (7, 5)]), frozenset([(8, 8), (8, 10), (6, 6)]), frozenset([(7, 5), (4, 4)]), frozenset([(8, 8), (6, 6), (4, 8)]), frozenset([(3, 7), (6, 6)]), frozenset([(8, 8), (6, 8), (7, 7), (7, 9), (5, 3)]), frozenset([(8, 8), (6, 6), (5, 7)]), frozenset([(8, 8), (7, 7)]), frozenset([(8, 2), (3, 5)]), frozenset([(7, 5), (9, 7)]), frozenset([(6, 4), (5, 3), (7, 5), (3, 3), (3, 7)]), frozenset([(8, 8), (6, 8), (6, 6)]), frozenset([(4, 10), (6, 4)]), frozenset([(9, 3), (5, 7), (6, 6), (7, 7), (8, 2)]), frozenset([(6, 6), (3, 3), (9, 7)]), frozenset([(7, 9), (6, 6), (4, 8)]), frozenset([(10, 4), (7, 7)]), frozenset([(3, 7), (6, 6), (8, 4)]), frozenset([(3, 7), (8, 8), (6, 6)]), frozenset([(3, 9), (6, 6), (5, 7)]), frozenset([(4, 4), (8, 2)]), frozenset([(3, 7), (8, 8), (6, 6), (7, 7), (4, 8)]), frozenset([(6, 6), (3, 7), (7, 5), (10, 6), (5, 7)]), frozenset([(6, 8), (7, 9)]), frozenset([(4, 10), (6, 6), (4, 8), (10, 6), (7, 5), (8, 6), (3, 7)]), frozenset([(7, 5), (6, 6), (8, 8)]), frozenset([(8, 4), (3, 5)])])
In [25]:
print ("GABAzine and control")
#X = np.array(list_gabazine_expected)
#y = np.array(list_gabazine_observed)

y = np.array(list_control_observed)
X = np.array(list_gabazine_observed)

idx   = np.argsort(X)
X = X[idx]
y = y[idx]

linear_Model = lmfit.Model(linearModel)
DN_Model = lmfit.Model(DN_model)

lin_pars = linear_Model.make_params()
lin_result = linear_Model.fit(y, lin_pars, x=X)
lin_aic.append(lin_result.aic)
lin_chi.append(lin_result.redchi)

DN_pars = DN_Model.make_params()
DN_result = DN_Model.fit(y, DN_pars, x=X)
dn_aic.append(DN_result.aic)
dn_chi.append(DN_result.redchi)

print (lin_result.fit_report())
print (DN_result.fit_report())

ax = plt.subplot(111)
ax.scatter(X, y, alpha=0.2)
ax.set_xlim(xmin=0.)
ax.set_ylim(ymin=0.)
ax.set_xlabel("Observed (GABAzine)")
ax.set_ylabel("Observed (Control)")
ax.set_title("Divisive Normalization and Inhibition fits")
ax.plot(X, lin_result.best_fit, '-', label="Divisive Inhibition Model")
ax.plot(X, DN_result.best_fit, '-', label="Divisive Normalization Model")
plt.legend()
plt.show()
GABAzine and control
[[Model]]
    Model(linearModel)
[[Fit Statistics]]
    # function evals   = 6
    # data points      = 58
    # variables        = 1
    chi-square         = 30.173
    reduced chi-square = 0.529
    Akaike info crit   = -35.902
    Bayesian info crit = -33.842
[[Variables]]
    a:   0.70593719 +/- 0.019002 (2.69%) (init= 1)
[[Correlations]] (unreported correlations are <  0.100)

[[Model]]
    Model(DN_model)
[[Fit Statistics]]
    # function evals   = 23
    # data points      = 58
    # variables        = 1
    chi-square         = 19.444
    reduced chi-square = 0.341
    Akaike info crit   = -61.389
    Bayesian info crit = -59.329
[[Variables]]
    a:   0.07593467 +/- 0.005551 (7.31%) (init= 1)
[[Correlations]] (unreported correlations are <  0.100)

In [ ]:
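A possible closing step (not in the original notebook) is to summarize the AIC and reduced chi-square values accumulated in lin_aic, dn_aic, lin_chi and dn_chi across the three fits above; the label order assumes the cells were run once, top to bottom:

# Sketch: side-by-side summary of the fit statistics collected above.
labels = ["Control", "GABAzine", "GABAzine vs Control"]
for label, la, da, lc, dc in zip(labels, lin_aic, dn_aic, lin_chi, dn_chi):
    print("{}: AIC lin={:.1f} DN={:.1f}; red. chi-sq lin={:.2f} DN={:.2f}".format(
        label, la, da, lc, dc))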